home
***
CD-ROM
|
disk
|
FTP
|
other
***
search
/
EnigmA Amiga Run 1995 November
/
EnigmA AMIGA RUN 02 (1995)(G.R. Edizioni)(IT)[!][issue 1995-11][Skylink CD].iso
/
earcd
/
misc
/
nnn.lha
/
nnn1.35
/
src
/
NetWriteConfig.c
< prev
next >
Wrap
C/C++ Source or Header
|
1995-03-22
|
3KB
|
98 lines
/*
* $Id: NetWriteConfig.c 1.9 1995/03/22 01:03:46 daltern Exp $
*
* Function NetWriteConfig
* Programmer Nicholas d'Alterio
* Date 14/03/95
*
* Synopsis: This function writes out the neural network configuration
* file to stdout.
*
* $Log: NetWriteConfig.c $
* Revision 1.9 1995/03/22 01:03:46 daltern
* Adapted to write each layers weights on a different line
*
* Revision 1.8 1995/03/20 23:39:36 daltern
* Added printing of gain factor to config file
*
* Revision 1.7 1995/03/17 18:15:03 projects
* Changed so that weights ending on a bias are not printed
*
* Revision 1.6 1995/03/17 17:54:09 projects
* Amended Weights writing section to print only elements used
* in line with change in v1.22 nnn.c
*
* Revision 1.5 1995/03/16 19:28:07 daltern
* Now prints extra parameter - momentum factor
*
* Revision 1.3 1995/03/16 17:31:56 daltern
* Stopped the printing of states to config file
*
* Revision 1.2 1995/03/14 22:59:32 daltern
* General clean up and commenting
*
*
*/
#include "Neural.h"
/*
 * Write the neural network configuration to stdout: the global training
 * parameters, the layer sizes, the weight matrices, and finally the
 * training vectors, each section introduced by its DELIMITER string.
 * Output format is identical to the original config-file layout.
 */
void NetWriteConfig(NET netI, VECTOR vecS, float ***Weight, float seed,
float learn_rate, float mom_fac, float accuracy, float vecs_correct, float gain )
{
    int layer, from, to, vec;

    /* Section 1: global parameters on one header-labelled line. */
    fprintf(stdout,"# Num Learning Momentum Gain Random Accuracy Percentage\n");
    fprintf(stdout,"# Layers Rate Factor Seed Correct\n");
    fprintf(stdout,"\t%d %f %f %f %f %f %f\n",
            netI.NumLayers, learn_rate, mom_fac, gain,
            seed, accuracy, vecs_correct);

    /* Section 2: one layer size per line. */
    fprintf(stdout,"%s\n", DELIMITER[2]);
    for ( layer = 0; layer < netI.NumLayers; layer++ ) {
        fprintf(stdout,"%d\n", netI.LayerSize[layer]);
    }

    /*
     * Section 3: weights, one line per layer pair.  Source index 0 is the
     * bias node (hence <=), while the destination index starts at 1 so
     * weights terminating on a bias node are never written.
     */
    fprintf(stdout,"%s\n", DELIMITER[0]);
    for ( layer = 0; layer + 1 < netI.NumLayers; layer++ ) {
        for ( from = 0; from <= netI.LayerSize[layer]; from++ ) {
            for ( to = 1; to <= netI.LayerSize[layer+1]; to++ ) {
                fprintf(stdout,"%f ", Weight[layer][from][to]);
            }
        }
        fprintf(stdout,"\n");
    }

    /* Section 4: training vectors — inputs (including the bias slot)
       followed by the expected outputs, one vector per line. */
    fprintf(stdout,"%s\n", DELIMITER[1]);
    for ( vec = 0; vec < vecS.NumVecs; vec++ ) {
        for ( from = 0; from <= netI.LayerSize[0]; from++ ) {
            fprintf(stdout,"%f ", vecS.InVec[vec][from]);
        }
        for ( to = 0; to < netI.LayerSize[netI.NumLayers-1]; to++ ) {
            fprintf(stdout,"%f ", vecS.OutVec[vec][to]);
        }
        fprintf(stdout,"\n");
    }

    return;

} /* end function NetWriteConfig */